import os
import plotly.colors
import polyflexmd.data_analysis.data.read as read
import polyflexmd.data_analysis.transform.transform as transform
import polyflexmd.data_analysis.theory.kremer_grest as kremer_grest
import polyflexmd.data_analysis.theory.rouse as rouse
import polyflexmd.data_analysis.plot.plot_system as plot_system
import polyflexmd.data_analysis.data.constants as data_constants
import polyflexmd.data_analysis.pipelines.trajectory as pipeline_process
import polyflexmd.experiment_runner.config as config
import polyflexmd.data_analysis.plot.msd
import pandas as pd
import pathlib
import matplotlib.pyplot as plt
import numpy as np
from pandarallel import pandarallel
import seaborn as sns
import scipy.optimize
import functools
import matplotlib.transforms
%load_ext autoreload
%autoreload 2
# Global plot styling: seaborn theme + matplotlib font sizes.
# (The original called sns.set_style("darkgrid") twice; once suffices.)
sns.set_style("darkgrid")
SMALL_SIZE = 12
MEDIUM_SIZE = 14
BIGGER_SIZE = 16
plt.rc('axes', titlesize=MEDIUM_SIZE)    # fontsize of the axes title
plt.rc('axes', labelsize=MEDIUM_SIZE)    # fontsize of the x and y labels
plt.rc('ytick', labelsize=SMALL_SIZE)    # fontsize of the tick labels
plt.rc('legend', fontsize=SMALL_SIZE)    # legend fontsize
plt.rc('figure', titlesize=BIGGER_SIZE)  # fontsize of the figure title
plt.rc('figure', labelsize=BIGGER_SIZE)  # fontsize of figure-level sup-labels
PATH_EXPERIMENT = "/beegfs/ws/0/s4610340-polyflexmd/data/experiment_results/FENE-beadspring-vary-l_K/4-FENE-beadspring-vary-l_K/538accb2"
NAME_EC = "4-FENE-beadspring-vary-l_K.toml"
EQUILIBRIUM_ONLY = True
CONTINUE = False
CONTINUE_t_equilibrium = 58000000
N_WORKERS = 16
CHECK_BOND_LENGTHS = False
CONTINUE = bool(CONTINUE)
CONTINUE_t_equilibrium = int(CONTINUE_t_equilibrium)
EQUILIBRIUM_ONLY = bool(EQUILIBRIUM_ONLY)
CHECK_BOND_LENGTHS = bool(CHECK_BOND_LENGTHS)
PATH_EXPERIMENT = pathlib.Path(PATH_EXPERIMENT)
PATH_SYSTEM_DEF = PATH_EXPERIMENT / "data/initial_system.data"
PATH_EC = PATH_EXPERIMENT / NAME_EC
N_WORKERS = int(N_WORKERS)
# Parse the experiment's TOML configuration into an ExperimentConfig object.
conf = config.read_experiment_config(PATH_EC)
conf
ExperimentConfig(simulation_config=SimulationConfig(job=SlurmJobConfig(account='p_mdpolymer', time='96:00:00', partition='romeo', nodes=8, tasks_per_node=125, ntasks=1000, cpus_per_task=1, mem_per_cpu=1000), lammps_executable='/scratch/ws/0/s4610340-bt-eea1-md-workspace/bin/lammps-patch_23Jun2022_update4/lmp_omp_romeo_opt', lmod_modules='modenv/hiera GCC/11.3.0 OpenMPI/4.1.4 Python/3.9.6', simulation_model_path=PosixPath('simulations/FENE-beadspring-vary-l_K.lammps'), experiments_path=PosixPath('/beegfs/ws/0/s4610340-polyflexmd/data/experiment_results'), n_partitions=8, n_tasks_per_partition=125, variables={'kappa_start': 1.0, 'kappa_delta': 10, 'kappa_n_values': 8, 'n_relax_steps': 30000000, 'n_equilibrium_steps_1': 2000000, 'dump_frequency_1': 100, 'n_equilibrium_steps_2': 33000000, 'dump_frequency_2': 10000}), initial_system_config=SystemCreatorConfig(system_type='create', job=SlurmJobConfig(account='p_mdpolymer', time='1:00:00', partition='romeo', nodes=1, tasks_per_node=1, ntasks=1, cpus_per_task=2, mem_per_cpu=2000), venv_path=PosixPath('/beegfs/ws/0/s4610340-polyflexmd/polyflexmd/.venv'), system_config=AnchoredFENEChainConfig(name='anchored-fene-rod', n_chains=500, n_monomers=64, monomer_type=2, bond_type=1, angle_type=1, bond_length=0.97, box_length=200, seed=133)), report_config=ReportConfig(job=SlurmJobConfig(account='p_mdpolymer', time='24:00:00', partition='romeo', nodes=1, tasks_per_node=1, ntasks=1, cpus_per_task=64, mem_per_cpu=7000), venv_path=PosixPath('/beegfs/ws/0/s4610340-polyflexmd/polyflexmd/.venv'), notebook=PosixPath('notebooks/3-FENE-vary-l_K.ipynb'), kernel='polyflexmd', notebook_params={'CONTINUE': 'False', 'EQUILIBRIUM_ONLY': 'True'}), data_processing_config=None)
# Arithmetic progression of bending stiffnesses kappa from the experiment
# config: kappa_start, kappa_start + kappa_delta, ...
_sim_vars = conf.simulation_config.variables
kappas: list[float] = [
    _sim_vars["kappa_start"] + i * _sim_vars["kappa_delta"]
    for i in range(_sim_vars["kappa_n_values"])
]
kappas
[1.0, 11.0, 21.0, 31.0, 41.0, 51.0, 61.0, 71.0]
# Read the initial LAMMPS system definition (atoms table, box bounds, ...).
system = read.read_lammps_system_data(PATH_SYSTEM_DEF)
system.atoms
| molecule-ID | type | x | y | z | ix | iy | iz | |
|---|---|---|---|---|---|---|---|---|
| 1 | 1 | 1 | 0.000000 | 0.000000 | 0.000000 | 0 | 0 | 0 |
| 2 | 1 | 1 | -0.336178 | 0.186169 | 0.890632 | 0 | 0 | 0 |
| 3 | 1 | 2 | -0.672356 | 0.372339 | 1.781264 | 0 | 0 | 0 |
| 4 | 1 | 2 | -1.008534 | 0.558508 | 2.671896 | 0 | 0 | 0 |
| 5 | 1 | 2 | -1.344712 | 0.744678 | 3.562528 | 0 | 0 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 31996 | 500 | 2 | 25.290807 | -50.942120 | -6.367762 | 0 | 0 | 0 |
| 31997 | 500 | 2 | 25.719465 | -51.805546 | -6.475690 | 0 | 0 | 0 |
| 31998 | 500 | 2 | 26.148122 | -52.668972 | -6.583618 | 0 | 0 | 0 |
| 31999 | 500 | 2 | 26.576780 | -53.532397 | -6.691547 | 0 | 0 | 0 |
| 32000 | 500 | 3 | 27.005438 | -54.395823 | -6.799475 | 0 | 0 | 0 |
32000 rows × 8 columns
system.box
-100.000000 100.000000 xlo xhi -100.000000 100.000000 ylo yhi -100.000000 100.000000 zlo zhi
# Processed-data directory; continuation runs store their output in a
# dedicated "continue" subdirectory.
_processed_root = PATH_EXPERIMENT / "data" / "processed"
PATH_DATA_PROCESSED = _processed_root / "continue" if CONTINUE else _processed_root
PATH_DATA_PROCESSED
PosixPath('/beegfs/ws/0/s4610340-polyflexmd/data/experiment_results/FENE-beadspring-vary-l_K/4-FENE-beadspring-vary-l_K/538accb2/data/processed')
PATH_DF_MAIN_AXIS = PATH_DATA_PROCESSED / "main_axis.csv"
df_main_axis = pd.read_csv(PATH_DF_MAIN_AXIS).groupby("molecule-ID").nth(1)
df_main_axis
| molecule-ID | type | x | y | z | ix | iy | iz | |
|---|---|---|---|---|---|---|---|---|
| 1 | 1 | 1 | -0.336178 | 0.186169 | 0.890632 | 0 | 0 | 0 |
| 3 | 2 | 1 | -0.272215 | -0.119898 | 0.923268 | 0 | 0 | 0 |
| 5 | 3 | 1 | -0.517323 | -0.532615 | -0.624178 | 0 | 0 | 0 |
| 7 | 4 | 1 | 0.302546 | -0.396453 | 0.831980 | 0 | 0 | 0 |
| 9 | 5 | 1 | -0.592061 | 0.192979 | 0.743722 | 0 | 0 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 991 | 496 | 1 | 0.301950 | -0.674609 | -0.628195 | 0 | 0 | 0 |
| 993 | 497 | 1 | -0.010688 | 0.215110 | -0.945787 | 0 | 0 | 0 |
| 995 | 498 | 1 | -0.072238 | 0.104175 | 0.961680 | 0 | 0 | 0 |
| 997 | 499 | 1 | -0.396090 | -0.883587 | 0.057330 | 0 | 0 | 0 |
| 999 | 500 | 1 | 0.428658 | -0.863426 | -0.107928 | 0 | 0 | 0 |
500 rows × 8 columns
# Source bond length and the resulting contour length L = l_b * (N_b - 1).
l_b_src = .97
L_src = l_b_src * (conf.initial_system_config.system_config.n_monomers - 1)
L_src
61.11
# One viridis color per kappa value, reused across all plots below.
kappa_colors = sns.color_palette("viridis", n_colors=len(kappas))
kappa_colors
PATH_ETE = PATH_DATA_PROCESSED / "ete.csv"
# End-to-end vectors per (kappa, molecule-ID, time step): components and norm.
df_ete = pd.read_csv(PATH_ETE, index_col=["kappa", "molecule-ID", "t"])
df_ete
| R_x | R_y | R_z | R | |||
|---|---|---|---|---|---|---|
| kappa | molecule-ID | t | ||||
| 1.0 | 1 | 30000000 | 4.653390 | 7.51274 | 7.235560 | 11.421411 |
| 2 | 30000000 | 5.183050 | 2.47633 | 9.489370 | 11.092537 | |
| 3 | 30000000 | 4.879620 | -1.24165 | 8.614230 | 9.977843 | |
| 4 | 30000000 | 0.573486 | -4.00533 | 3.909040 | 5.626024 | |
| 5 | 30000000 | -2.084040 | -6.83918 | 0.227633 | 7.153281 | |
| ... | ... | ... | ... | ... | ... | ... |
| 71.0 | 496 | 65000000 | 32.172900 | -46.69850 | -10.628500 | 57.695847 |
| 497 | 65000000 | 19.717800 | 37.38680 | -38.115400 | 56.915268 | |
| 498 | 65000000 | -42.031100 | -7.32022 | 25.061500 | 49.480076 | |
| 499 | 65000000 | -25.478400 | -50.98580 | 15.163900 | 58.980034 | |
| 500 | 65000000 | 46.443500 | -29.78540 | 7.322810 | 55.657818 |
93204000 rows × 4 columns
# Derived columns: LJ time (dt = 0.0025 tau per MD step) and squared norm.
# Vectorized arithmetic on the index level instead of a per-element Python
# lambda via .map -- the table has ~93M rows.
df_ete["t/LJ"] = df_ete.index.get_level_values("t") * 0.0025
df_ete["R^2"] = df_ete["R"] ** 2
df_ete
| R_x | R_y | R_z | R | t/LJ | R^2 | |||
|---|---|---|---|---|---|---|---|---|
| kappa | molecule-ID | t | ||||||
| 1.0 | 1 | 30000000 | 4.653390 | 7.51274 | 7.235560 | 11.421411 | 75000.0 | 130.448641 |
| 2 | 30000000 | 5.183050 | 2.47633 | 9.489370 | 11.092537 | 75000.0 | 123.044377 | |
| 3 | 30000000 | 4.879620 | -1.24165 | 8.614230 | 9.977843 | 75000.0 | 99.557351 | |
| 4 | 30000000 | 0.573486 | -4.00533 | 3.909040 | 5.626024 | 75000.0 | 31.652148 | |
| 5 | 30000000 | -2.084040 | -6.83918 | 0.227633 | 7.153281 | 75000.0 | 51.169425 | |
| ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 71.0 | 496 | 65000000 | 32.172900 | -46.69850 | -10.628500 | 57.695847 | 162500.0 | 3328.810761 |
| 497 | 65000000 | 19.717800 | 37.38680 | -38.115400 | 56.915268 | 162500.0 | 3239.347732 | |
| 498 | 65000000 | -42.031100 | -7.32022 | 25.061500 | 49.480076 | 162500.0 | 2448.277921 | |
| 499 | 65000000 | -25.478400 | -50.98580 | 15.163900 | 58.980034 | 162500.0 | 3478.644411 | |
| 500 | 65000000 | 46.443500 | -29.78540 | 7.322810 | 55.657818 | 162500.0 | 3097.792705 |
93204000 rows × 6 columns
# Ensemble average of R^2 over molecules for each (kappa, t).
df_ete_mean = df_ete.groupby(["kappa", "t"])[["R^2"]].mean()
# Analytical (bare) Kuhn length for each kappa level of the index.
df_ete_mean["l_K"] = kremer_grest.bare_kuhn_length(
    np.array(df_ete_mean.index.get_level_values("kappa")),
    l_b=l_b_src
)
df_ete_mean
| R^2 | l_K | ||
|---|---|---|---|
| kappa | t | ||
| 1.0 | 30000000 | 121.753577 | 1.854017 |
| 30000100 | 122.022246 | 1.854017 | |
| 30000200 | 122.315529 | 1.854017 | |
| 30000300 | 122.462672 | 1.854017 | |
| 30000400 | 122.593975 | 1.854017 | |
| ... | ... | ... | ... |
| 71.0 | 64960000 | 2825.152344 | 136.770000 |
| 64970000 | 2824.566295 | 136.770000 | |
| 64980000 | 2816.356893 | 136.770000 | |
| 64990000 | 2804.853668 | 136.770000 | |
| 65000000 | 2797.986697 | 136.770000 |
186408 rows × 2 columns
# Plot sqrt(<R(t)^2>)/L over time, one line per l_K/L (categorical hue).
ax: plt.Axes
fig: plt.Figure
fig, ax = plt.subplots(figsize=(10, 8))
ax.set(
    title=f'$\sqrt{{\\langle R(t)^2 \\rangle }}$ for different kuhn lengths $l_K$; '
          f'N beads $N_b={conf.initial_system_config.system_config.n_monomers}$;',
    ylabel="distance / L",
    xlabel="t / step"
)
sns.lineplot(
    x=df_ete_mean.index.get_level_values("t"),
    y=np.sqrt(df_ete_mean["R^2"]) / L_src,
    # legend key: l_K / L rounded to 2 decimals, as a categorical hue
    hue=(df_ete_mean["l_K"] / L_src).round(2).astype("category"),
    color=kappa_colors,
    ax=ax
)
ax.get_legend().set_title("$l_K/L$")
# Step at which equilibrium sampling starts: end of the relax phase, or the
# explicit override when continuing a previous run.
t_equilibrium = conf.simulation_config.variables["n_relax_steps"] if not CONTINUE else CONTINUE_t_equilibrium
t_equilibrium
30000000
# Configured bond length (LJ units) from the system definition.
l_b = conf.initial_system_config.system_config.bond_length
l_b
0.97
# Contour length L = l_b * (N_b - 1).
L_contour = l_b * (conf.initial_system_config.system_config.n_monomers - 1)
L_contour
61.11
Analytical bare Kuhn length, Svaneborg eq. (8):
# Analytical bare Kuhn lengths for all kappas (Svaneborg eq. 8).
l_ks = kremer_grest.bare_kuhn_length(np.array(kappas), l_b)
l_ks
array([ 1.85401695, 20.37000013, 39.77 , 59.17 ,
78.57 , 97.97 , 117.37 , 136.77 ])
Estimate from fit (Worm-like chain model, Hinczewski):
PATH_L_K_ESTIMATE = PATH_DATA_PROCESSED / "l_K-estimate.csv"
# Per-time fitted Kuhn-length estimates (worm-like chain fit) per kappa.
l_ks_estimate = pd.read_csv(PATH_L_K_ESTIMATE, index_col="kappa")
l_ks_estimate
| t | l_K | d_l_K | |
|---|---|---|---|
| kappa | |||
| 1.0 | 30000000 | 1.686888 | 0.026178 |
| 11.0 | 30000000 | 20.290522 | 0.105991 |
| 21.0 | 30000000 | 40.452802 | 0.136657 |
| 31.0 | 30000000 | 61.616077 | 0.127580 |
| 41.0 | 30000000 | 78.770816 | 0.110465 |
| ... | ... | ... | ... |
| 31.0 | 65000000 | 60.772934 | 0.111231 |
| 41.0 | 65000000 | 76.998405 | 0.134249 |
| 51.0 | 65000000 | 94.909123 | 0.248108 |
| 61.0 | 65000000 | 113.121888 | 0.179078 |
| 71.0 | 65000000 | 129.758617 | 0.332634 |
186408 rows × 3 columns
def aggregate_l_K_estimate(df: pd.DataFrame) -> pd.Series:
    """Aggregate the per-time Kuhn-length estimates of one kappa group.

    Returns a Series with the mean ``l_K`` and its uncertainty ``d_l_K``
    taken as three standard deviations of the per-time estimates.
    """
    l_K = df["l_K"].mean()
    # BUG FIX: the original chained assignment
    # (`d_l_K = df["l_K"] = df["l_K"].std() * 3`) also overwrote the
    # group's "l_K" column as a side effect; only the local is needed.
    d_l_K = df["l_K"].std() * 3
    return pd.Series([l_K, d_l_K], index=["l_K", "d_l_K"])
# Collapse the per-time estimates to one (mean, 3*std) row per kappa.
l_ks_estimate_agg = l_ks_estimate.groupby("kappa").apply(aggregate_l_K_estimate)
l_ks_estimate_agg
| l_K | d_l_K | |
|---|---|---|
| kappa | ||
| 1.0 | 1.671224 | 0.046628 |
| 11.0 | 20.627271 | 1.062030 |
| 21.0 | 41.584460 | 4.017132 |
| 31.0 | 59.914476 | 5.426656 |
| 41.0 | 78.868018 | 6.045823 |
| 51.0 | 100.996739 | 8.289396 |
| 61.0 | 116.013684 | 10.383716 |
| 71.0 | 139.463155 | 12.719022 |
l_ks_estimate_agg / L_contour
| l_K | d_l_K | |
|---|---|---|
| kappa | ||
| 1.0 | 0.027348 | 0.000763 |
| 11.0 | 0.337543 | 0.017379 |
| 21.0 | 0.680485 | 0.065736 |
| 31.0 | 0.980437 | 0.088801 |
| 41.0 | 1.290591 | 0.098933 |
| 51.0 | 1.652704 | 0.135647 |
| 61.0 | 1.898440 | 0.169918 |
| 71.0 | 2.282166 | 0.208133 |
Difference of analytical l_K and estimated l_K relative to analytical l_K
np.abs(l_ks - l_ks_estimate_agg["l_K"]) / l_ks
kappa 1.0 0.098593 11.0 0.012630 21.0 0.045624 31.0 0.012582 41.0 0.003793 51.0 0.030895 61.0 0.011556 71.0 0.019691 Name: l_K, dtype: float64
Estimate $N_K = L / l_K$ as in Svaneborg (6)
# Number of Kuhn segments N_K = L / l_K (Svaneborg eq. 6); drops below 1
# for the stiffest chains, where l_K exceeds the contour length.
N_Ks = (L_contour / l_ks)
N_Ks
array([32.96086365, 2.99999998, 1.53658537, 1.03278689, 0.77777778,
0.62376238, 0.52066116, 0.44680851])
Estimate $\langle R^2 \rangle$ as average over ensemble and then over time in equilibrium
# Time-average of the ensemble-averaged R^2 over the equilibrium window.
df_ete_sq_t_mean_kappas = transform.calculate_ete_sq_t_avg_df_kappas(df_ete_mean, t_equilibrium)
df_ete_sq_t_mean_kappas
| R^2 | |
|---|---|
| kappa | |
| 1.0 | 113.404773 |
| 11.0 | 1054.448004 |
| 21.0 | 1721.416559 |
| 31.0 | 2099.449019 |
| 41.0 | 2369.805322 |
| 51.0 | 2589.951859 |
| 61.0 | 2708.717934 |
| 71.0 | 2844.979417 |
# Per-kappa summary table: equilibrium <R^2>, Kuhn segment count and length.
df_kuhn_summary = pd.DataFrame(index=df_ete_sq_t_mean_kappas.index)
df_kuhn_summary["R^2"] = df_ete_sq_t_mean_kappas["R^2"]
df_kuhn_summary["N_K"] = N_Ks
df_kuhn_summary["l_K"] = l_ks
df_kuhn_summary
df_kuhn_summary
| R^2 | N_K | l_K | |
|---|---|---|---|
| kappa | |||
| 1.0 | 113.404773 | 32.960864 | 1.854017 |
| 11.0 | 1054.448004 | 3.000000 | 20.370000 |
| 21.0 | 1721.416559 | 1.536585 | 39.770000 |
| 31.0 | 2099.449019 | 1.032787 | 59.170000 |
| 41.0 | 2369.805322 | 0.777778 | 78.570000 |
| 51.0 | 2589.951859 | 0.623762 | 97.970000 |
| 61.0 | 2708.717934 | 0.520661 | 117.370000 |
| 71.0 | 2844.979417 | 0.446809 | 136.770000 |
# Compare the measured equilibrium <R^2> against the ideal-chain and
# worm-like-chain predictions as a function of l_K / L.
fig, ax = plt.subplots(figsize=(8, 6))
sns.scatterplot(
    x=df_kuhn_summary["l_K"] / L_contour,
    y=df_kuhn_summary["R^2"] / L_contour ** 2,
    ax=ax,
    label="Simulation"
)
# Ideal (freely jointed) chain: <R^2> = l_K^2 N_K
sns.lineplot(
    x=df_kuhn_summary["l_K"] / L_contour,
    y=(df_kuhn_summary["l_K"] ** 2 * N_Ks) / L_contour ** 2,
    label=r"$\langle R^2 \rangle = l_K^2 N_K$",
    color="orange"
)
# Worm-like chain with persistence length l_p = l_K / 2:
# <R^2> = 2 l_p L - 2 l_p^2 (1 - exp(-L / l_p))
l_ps = df_kuhn_summary["l_K"] / 2
Rs_wl_chain = 2*l_ps*L_contour - 2*l_ps**2 * (1 - np.exp(-L_contour / l_ps))
sns.lineplot(
    x=df_kuhn_summary["l_K"] / L_contour,
    y=Rs_wl_chain / L_contour ** 2,
    label=r"$\langle R^2 \rangle$ worm-like chain",
    color="green",
    linestyle="--"
)
ax.set(
    title=r"$\langle R^2 \rangle$ in equilibrium vs kuhn length",
    xlabel="$l_K / L$",
    ylabel=r"$\frac{\langle R^2 \rangle}{L^2}$"
)
ax.legend(fontsize=15)
ax.xaxis.get_label().set_fontsize(15)
ax.yaxis.get_label().set_fontsize(18)
df_kuhn_summary["l_K"] ** 2 * df_kuhn_summary["N_K"] / df_kuhn_summary["R^2"]
kappa 1.0 0.999067 11.0 1.180533 21.0 1.411828 31.0 1.722299 41.0 2.026079 51.0 2.311605 61.0 2.647925 71.0 2.937812 dtype: float64
import polyflexmd.data_analysis.transform.msd as msd
# Location of the cached MSD table.
PATH_MSD = PATH_DATA_PROCESSED / "msd.csv"
PATH_MSD
PosixPath('/beegfs/ws/0/s4610340-polyflexmd/data/experiment_results/FENE-beadspring-vary-l_K/4-FENE-beadspring-vary-l_K/538accb2/data/processed/msd.csv')
# Load the cached MSD table if present; otherwise compute it from the
# end-to-end vectors, grouped per kappa.
if PATH_MSD.exists():
    df_msd = pd.read_csv(PATH_MSD, index_col="t")
else:
    df_msd = msd.calculate_msd_df(df_ete, ["kappa"])
# LJ time (dt = 0.0025 tau) relative to the first equilibrium frame;
# vectorized instead of the original per-element index.map(lambda ...).
df_msd["t/LJ"] = df_msd.index * 0.0025
df_msd["t/LJ"] = df_msd["t/LJ"] - df_msd["t/LJ"].min()
# Analytical bare Kuhn length per row's kappa.
df_msd["l_K"] = kremer_grest.bare_kuhn_length(df_msd["kappa"], l_b=l_b)
df_msd
| dR^2 | delta dR^2 | kappa | t/LJ | l_K | |
|---|---|---|---|---|---|
| t | |||||
| 30000000 | 0.000000 | 0.000000 | 1.0 | 0.00 | 1.854017 |
| 30000100 | 0.131123 | 0.015256 | 1.0 | 0.25 | 1.854017 |
| 30000200 | 0.469626 | 0.050481 | 1.0 | 0.50 | 1.854017 |
| 30000300 | 0.925409 | 0.098095 | 1.0 | 0.75 | 1.854017 |
| 30000400 | 1.451558 | 0.156999 | 1.0 | 1.00 | 1.854017 |
| ... | ... | ... | ... | ... | ... |
| 64960000 | 2170.061793 | 235.428868 | 71.0 | 87400.00 | 136.770000 |
| 64970000 | 2165.468234 | 236.485894 | 71.0 | 87425.00 | 136.770000 |
| 64980000 | 2169.143304 | 236.389463 | 71.0 | 87450.00 | 136.770000 |
| 64990000 | 2167.883654 | 235.034069 | 71.0 | 87475.00 | 136.770000 |
| 65000000 | 2192.788809 | 234.926677 | 71.0 | 87500.00 | 136.770000 |
186408 rows × 5 columns
def plot_msd(df: pd.DataFrame, time_col: str = "t", time_label: str = "t / step", log_scale: bool = False):
    """Plot sqrt(MSD)/L of the end-to-end vector, one line per kappa.

    Parameters
    ----------
    df : MSD table with columns "dR^2", "delta dR^2", "kappa" and `time_col`.
    time_col : column plotted on the x axis.
    time_label : label of the x axis.
    log_scale : use log-log axes when True.

    Returns the matplotlib Axes.
    """
    df = df.reset_index()
    ax: plt.Axes
    fig: plt.Figure
    fig, ax = plt.subplots(figsize=(10, 8))
    ax.set(
        # Raw strings for the LaTeX parts so backslashes are not treated as
        # (invalid) escape sequences; the original non-raw f-string relied on
        # CPython passing unknown escapes through, which now warns.
        title=(
            r'Empirical $ \sqrt {\langle (\Delta R(t))^2 \rangle} $ for different kuhn lengths $l_K$;'
            '\n'
            f'$N_b={conf.initial_system_config.system_config.n_monomers};$'
            r'$\zeta_e=\zeta=1.0$'
        ),
        ylabel="distance / L",
        xlabel=time_label
    )
    for i, (kappa, df_kappa) in enumerate(df.groupby("kappa")):
        y = np.sqrt(df_kappa["dR^2"]) / L_contour
        # error propagation: delta sqrt(g) = delta g / (2 sqrt(g))
        dy = df_kappa["delta dR^2"] / (np.sqrt(df_kappa["dR^2"]) * L_contour * 2)
        ax.plot(
            df_kappa[time_col],
            y,
            c=kappa_colors[i],
            label=f"{kremer_grest.bare_kuhn_length(kappa, l_b) / L_contour : .2f}",
        )
        ax.fill_between(
            x=df_kappa[time_col],
            y1=y - dy,
            y2=y + dy,
            color=kappa_colors[i],
            alpha=.1,
            linewidth=0
        )
    ax.legend()
    ax.get_legend().set_title("$l_K/L$")
    if log_scale:
        ax.set(xscale="log", yscale="log")
    return ax
plot_msd(df_msd)
<Axes: title={'center': 'Empirical $ \\sqrt {\\langle (\\Delta R(t))^2 \\rangle} $ for different kuhn lengths $l_K$;\n$N_b=64;$$\\zeta_e=\\zeta=1.0$'}, xlabel='t / step', ylabel='distance / L'>
# Same MSD plot in LJ time on log-log axes; the explicit ax.set below
# repeats log_scale=True's effect but also resets the title.
ax = plot_msd(df_msd, time_col="t/LJ", time_label="t / LJ", log_scale=True)
_ = ax.set(
    yscale="log",
    xscale="log",
    title=f'Empirical $ \sqrt {{\langle (\Delta R(t))^2 \\rangle}} $ for different kuhn lengths $l_K$;\n'
          f'$N_b={conf.initial_system_config.system_config.n_monomers};$'
          f'$\zeta_e=\zeta=1.0$',
)
PATH_ETE_MAIN_AX = PATH_DATA_PROCESSED / "ete_main_ax.csv"
# End-to-end vectors expressed in each molecule's main-axis frame;
# computed once via change_basis_df_ete and cached as CSV.
if PATH_ETE_MAIN_AX.exists():
    print("Reading ...")
    df_ete_main_ax_frame = pd.read_csv(PATH_ETE_MAIN_AX, index_col="t")
else:
    print("Processing ...")
    df_ete_main_ax_frame = transform.change_basis_df_ete(df_ete, df_main_axis)
    df_ete_main_ax_frame.to_csv(PATH_ETE_MAIN_AX, index=True)
df_ete_main_ax_frame
Processing ...
| R_x | R_y | R_z | R | t/LJ | R^2 | |||
|---|---|---|---|---|---|---|---|---|
| kappa | molecule-ID | t | ||||||
| 1.0 | 1 | 30000000 | 7.352342 | -5.873343 | 6.472676 | 11.421411 | 75000.0 | 130.448641 |
| 11.0 | 1 | 30000000 | 0.301723 | 6.699914 | 6.187263 | 9.124806 | 75000.0 | 83.262085 |
| 21.0 | 1 | 30000000 | 14.893539 | 20.632757 | -17.874411 | 31.096989 | 75000.0 | 967.022725 |
| 31.0 | 1 | 30000000 | 40.844770 | 7.508727 | 6.804286 | 42.082950 | 75000.0 | 1770.974681 |
| 41.0 | 1 | 30000000 | -10.612842 | -9.343206 | 50.099404 | 52.056490 | 75000.0 | 2709.878151 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 31.0 | 500 | 65000000 | -46.747730 | -22.830154 | 3.035810 | 52.113170 | 162500.0 | 2715.782487 |
| 41.0 | 500 | 65000000 | 23.882994 | 1.803430 | -5.087860 | 24.485426 | 162500.0 | 599.536086 |
| 51.0 | 500 | 65000000 | 38.260428 | -4.190383 | 40.148024 | 55.617300 | 162500.0 | 3093.284059 |
| 61.0 | 500 | 65000000 | -20.486125 | 4.428006 | 49.540542 | 53.791763 | 162500.0 | 2893.553767 |
| 71.0 | 500 | 65000000 | 29.002865 | -10.960685 | 46.222175 | 55.657818 | 162500.0 | 3097.792705 |
93204000 rows × 6 columns
# Per-dimension MSD (x, y, z separately) in the main-axis frame, per kappa.
df_msd_by_dim = msd.calculate_msd_by_dimension_df(
    df_ete=df_ete_main_ax_frame,
    group_by_params=["kappa"]
)
# LJ time (dt = 0.0025 tau) relative to the first frame.
df_msd_by_dim["t/LJ"] = df_msd_by_dim.index.get_level_values("t").map(lambda t: t * 0.0025)
df_msd_by_dim["t/LJ"] = df_msd_by_dim["t/LJ"] - df_msd_by_dim["t/LJ"].min()
df_msd_by_dim
| dR_x^2 | dR_y^2 | dR_z^2 | delta dR_x^2 | delta dR_y^2 | delta dR_z^2 | dR^2 | kappa | t/LJ | |
|---|---|---|---|---|---|---|---|---|---|
| t | |||||||||
| 30000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 1.0 | 0.00 |
| 30000100 | 0.043648 | 0.043845 | 0.043629 | 0.008196 | 0.008455 | 0.008284 | 0.131123 | 1.0 | 0.25 |
| 30000200 | 0.160976 | 0.158379 | 0.150270 | 0.030678 | 0.028299 | 0.027057 | 0.469626 | 1.0 | 0.50 |
| 30000300 | 0.328723 | 0.305916 | 0.290770 | 0.062211 | 0.053110 | 0.053173 | 0.925409 | 1.0 | 0.75 |
| 30000400 | 0.529745 | 0.471166 | 0.450647 | 0.101122 | 0.082558 | 0.084038 | 1.451558 | 1.0 | 1.00 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 64960000 | 968.901091 | 864.891465 | 336.269238 | 160.643409 | 156.671159 | 70.279715 | 2170.061793 | 71.0 | 87400.00 |
| 64970000 | 958.747908 | 866.253213 | 340.467113 | 158.797716 | 159.283156 | 71.459793 | 2165.468234 | 71.0 | 87425.00 |
| 64980000 | 959.808591 | 870.472146 | 338.862566 | 160.472304 | 158.438912 | 69.557689 | 2169.143304 | 71.0 | 87450.00 |
| 64990000 | 951.824845 | 881.303638 | 334.755171 | 159.542612 | 158.422890 | 68.331356 | 2167.883654 | 71.0 | 87475.00 |
| 65000000 | 958.735727 | 881.134346 | 352.918737 | 159.537063 | 157.088799 | 72.783077 | 2192.788809 | 71.0 | 87500.00 |
186408 rows × 9 columns
import matplotlib.transforms as mtransforms

# Per-dimension MSD curves (x, y, z overlaid), one subplot per kappa, linear axes.
fig: plt.Figure
fig, axs = plt.subplots(nrows=2, ncols=4, figsize=(2 * len(kappas), 8), sharex="all", sharey="all",
                        layout="constrained")
axs = axs.flatten()
dimensions = ["x", "y", "z"]
for (kappa, df_g), ax in zip(df_msd_by_dim.groupby("kappa"), axs.flat):
    for dim in dimensions:
        l_K = kremer_grest.bare_kuhn_length(kappa, l_b) / L_contour
        y = np.sqrt(df_g[f"dR_{dim}^2"]) / L_contour
        # error propagation: delta sqrt(g) = delta g / (2 sqrt(g))
        dy = df_g[f"delta dR_{dim}^2"] / (np.sqrt(df_g[f"dR_{dim}^2"]) * L_contour * 2)
        label = f"$l_K / L = {l_K : .2f}$"
        # NOTE(review): the l_K label box is redrawn once per dimension
        # (3x per subplot) -- harmless but redundant.
        trans = mtransforms.ScaledTranslation(5 / 72, -5 / 72, fig.dpi_scale_trans)
        ax.text(0.0, 1.0, label, transform=ax.transAxes + trans,
                fontsize='large', verticalalignment='top',
                bbox=dict(facecolor='0.8', edgecolor='0.9'))
        ax.plot(
            df_g["t/LJ"],
            y,
            label=dim,
            #path_effects=[pe.Stroke(linewidth=2, foreground='black'), pe.Normal()]
        )
        ax.fill_between(
            x=df_g["t/LJ"],
            y1=y - dy,
            y2=y + dy,
            alpha=.2,
            linewidth=0
        )
    ax.legend().set_visible(False)
fig.legend(*axs[-1].get_legend_handles_labels(), ncol=3, loc='lower center', bbox_to_anchor=(0.5, -0.06))
fig.supxlabel("t / LJ")
fig.supylabel("$ \sqrt {{\langle (\Delta R(t))^2 \\rangle}} / L$")
fig.suptitle(f"MSD for different dimensions; z || 1. bond; $\zeta_e = \zeta = 1.0$; L = {L_contour : .2f}")
Text(0.5, 0.98, 'MSD for different dimensions; z || 1. bond; $\\zeta_e = \\zeta = 1.0$; L = 61.11')
# Same per-dimension MSD figure as the previous cell, but on log-log axes.
fig: plt.Figure
fig, axs = plt.subplots(nrows=2, ncols=4, figsize=(2 * len(kappas), 8), sharex="all", sharey="all",
                        layout="constrained")
axs = axs.flatten()
dimensions = ["x", "y", "z"]
for (kappa, df_g), ax in zip(df_msd_by_dim.groupby("kappa"), axs.flat):
    for dim in dimensions:
        l_K = kremer_grest.bare_kuhn_length(kappa, l_b) / L_contour
        y = np.sqrt(df_g[f"dR_{dim}^2"]) / L_contour
        # error propagation: delta sqrt(g) = delta g / (2 sqrt(g))
        dy = df_g[f"delta dR_{dim}^2"] / (np.sqrt(df_g[f"dR_{dim}^2"]) * L_contour * 2)
        label = f"$l_K / L = {l_K : .2f}$"
        trans = mtransforms.ScaledTranslation(5 / 72, -5 / 72, fig.dpi_scale_trans)
        ax.text(0.0, 1.0, label, transform=ax.transAxes + trans,
                fontsize='large', verticalalignment='top',
                bbox=dict(facecolor='0.8', edgecolor='0.9'))
        ax.plot(
            df_g["t/LJ"],
            y,
            label=dim,
            #path_effects=[pe.Stroke(linewidth=2, foreground='black'), pe.Normal()]
        )
        ax.fill_between(
            x=df_g["t/LJ"],
            y1=y - dy,
            y2=y + dy,
            alpha=.2,
            linewidth=0
        )
    ax.legend().set_visible(False)
    ax.set(xscale="log", yscale="log")
fig.legend(*axs[-1].get_legend_handles_labels(), ncol=3, loc='lower center', bbox_to_anchor=(0.5, -0.06))
fig.supxlabel("t / LJ")
fig.supylabel("$ \sqrt {{\langle (\Delta R(t))^2 \\rangle}} / L$")
fig.suptitle(f"MSD for different dimensions; z || 1. bond; $\zeta_e = \zeta = 1.0$; L = {L_contour : .2f}")
Text(0.5, 0.98, 'MSD for different dimensions; z || 1. bond; $\\zeta_e = \\zeta = 1.0$; L = 61.11')
# One panel per dimension (x, y, z), all kappas overlaid, linear axes.
fig: plt.Figure
fig, axs = plt.subplots(ncols=len(dimensions), nrows=1, figsize=(18, 5), sharey="all", layout="constrained")
axs = axs.flatten()


def _plot(ax, dim):
    """Plot sqrt(MSD_dim)/L vs LJ time for every kappa on `ax`."""
    for i, (kappa, df_kappa) in enumerate(df_msd_by_dim.groupby("kappa")):
        y = np.sqrt(df_kappa[f"dR_{dim}^2"]) / L_contour
        # BUG FIX: the error band must be propagated from the same
        # per-dimension MSD (dR_{dim}^2), not the total dR^2 -- matching
        # the propagation used in the per-kappa subplot figure above.
        dy = df_kappa[f"delta dR_{dim}^2"] / (np.sqrt(df_kappa[f"dR_{dim}^2"]) * L_contour * 2)
        l_K = kremer_grest.bare_kuhn_length(kappa, l_b) / L_contour
        ax.plot(
            df_kappa["t/LJ"],
            y,
            c=kappa_colors[i],
            label=f"$l_K/L = {l_K : .2f}$",
            linewidth=.9
        )
        ax.fill_between(
            x=df_kappa["t/LJ"],
            y1=y - dy,
            y2=y + dy,
            color=kappa_colors[i],
            alpha=.3,
            linewidth=0
        )
    ax.set(title=f"{dim}")


for dim, ax in zip(dimensions, axs):
    _plot(ax, dim)
_ = fig.legend(*axs[-1].get_legend_handles_labels(), ncol=8, loc='lower center', bbox_to_anchor=(0.5, -0.08))
fig.suptitle(rf"MSD for different dimensions; z || 1. bond; $\zeta_e = \zeta = 1.0$; L = {L_contour : .2f}")
fig.supxlabel("t/LJ")
fig.supylabel("distance/L")
Text(0.02, 0.5, 'distance/L')
# Same per-dimension panels as the previous cell, but on log-log axes.
fig: plt.Figure
fig, axs = plt.subplots(ncols=len(dimensions), nrows=1, figsize=(18, 5), sharey="all", layout="constrained")
axs = axs.flatten()


def _plot(ax, dim):
    """Plot sqrt(MSD_dim)/L vs LJ time for every kappa on `ax` (log-log)."""
    for i, (kappa, df_kappa) in enumerate(df_msd_by_dim.groupby("kappa")):
        y = np.sqrt(df_kappa[f"dR_{dim}^2"]) / L_contour
        # BUG FIX: propagate the uncertainty from the per-dimension MSD
        # (dR_{dim}^2) rather than the total dR^2.
        dy = df_kappa[f"delta dR_{dim}^2"] / (np.sqrt(df_kappa[f"dR_{dim}^2"]) * L_contour * 2)
        l_K = kremer_grest.bare_kuhn_length(kappa, l_b) / L_contour
        ax.plot(
            df_kappa["t/LJ"],
            y,
            c=kappa_colors[i],
            label=f"$l_K/L = {l_K : .2f}$",
            linewidth=.9
        )
        ax.fill_between(
            x=df_kappa["t/LJ"],
            y1=y - dy,
            y2=y + dy,
            color=kappa_colors[i],
            alpha=.3,
            linewidth=0
        )
    ax.set(title=f"{dim}")
    ax.set(xscale="log", yscale="log")


for dim, ax in zip(dimensions, axs):
    _plot(ax, dim)
_ = fig.legend(*axs[-1].get_legend_handles_labels(), ncol=8, loc='lower center', bbox_to_anchor=(0.5, -0.08))
fig.suptitle(rf"MSD for different dimensions; z || 1. bond; $\zeta_e = \zeta = 1.0$; L = {L_contour : .2f}")
fig.supxlabel("t/LJ")
fig.supylabel("distance/L")
Text(0.02, 0.5, 'distance/L')
Rouse relaxation time, Svaneborg eq. (15):
Rouse relaxation time: $$ \tau_R = \frac{1}{3 \pi^2} \frac{\zeta_K N_K l_K^2}{k_B T} = \frac{1}{3 \pi^2} \frac{\zeta N_b N_K l_K^2}{k_B T}$$ Relaxation time of single bead: $$ \tau_0 = \frac{3 \pi^2 \tau_R}{N_K^2} $$
zeta = 1   # bead friction coefficient (LJ units)
T = 1      # temperature (LJ units)
k_B = 1    # Boltzmann constant (LJ units)
alpha = 4.047  # empirical correction factor applied to the bare Rouse time

# Analytical Rouse time per kappa (Svaneborg eq. 15):
#   tau_R = zeta * N_b * N_K * l_K^2 / (3 pi^2 k_B T), scaled by alpha.
# Vectorized over the summary table; the original loop ignored its
# enumerate index and the R^2 values entirely.
rouse_times_analytical = list(
    alpha * zeta * conf.initial_system_config.system_config.n_monomers
    * df_kuhn_summary["N_K"] * df_kuhn_summary["l_K"] ** 2
    / (3 * np.pi ** 2 * k_B * T)
)
df_rouse_times_theory = pd.DataFrame({
    "l_K": df_kuhn_summary["l_K"],
    "N_K": df_kuhn_summary["N_K"],
    "Theory tau_R": rouse_times_analytical,
    # single-bead relaxation time: tau_0 = 3 pi^2 tau_R / N_K^2
    "Theory tau_0": (rouse_times_analytical / df_kuhn_summary["N_K"] ** 2) * 3 * np.pi**2
}, index=df_kuhn_summary.index)
df_rouse_times_theory
| l_K | N_K | Theory tau_R | Theory tau_0 | |
|---|---|---|---|---|
| kappa | ||||
| 1.0 | 1.854017 | 32.960864 | 991.101568 | 2.701108e+01 |
| 11.0 | 20.370000 | 3.000000 | 10889.187947 | 3.582399e+04 |
| 21.0 | 39.770000 | 1.536585 | 21259.842998 | 2.666046e+05 |
| 31.0 | 59.170000 | 1.032787 | 31630.498119 | 8.780224e+05 |
| 41.0 | 78.570000 | 0.777778 | 42001.153240 | 2.055754e+06 |
| 51.0 | 97.970000 | 0.623762 | 52371.808361 | 3.985476e+06 |
| 61.0 | 117.370000 | 0.520661 | 62742.463482 | 6.852866e+06 |
| 71.0 | 136.770000 | 0.446809 | 73113.118603 | 1.084360e+07 |
import matplotlib.patheffects as pe

# Compare the empirical total MSD with the analytical Rouse prediction
# (theoretical tau_R), one subplot per kappa.
fig: plt.Figure
fig, axs = plt.subplots(nrows=2, ncols=4, figsize=(2 * len(kappas), 8), sharex="all", sharey="all",
                        layout="constrained")
axs = axs.flatten()
for i, (kappa, df_g) in enumerate(df_msd.groupby("kappa")):
    ax = axs[i]
    l_K = kremer_grest.bare_kuhn_length(kappa, l_b) / L_contour
    y = np.sqrt(df_g[f"dR^2"]) / L_contour
    # error propagation: delta sqrt(g) = delta g / (2 sqrt(g))
    dy = df_g[f"delta dR^2"] / (np.sqrt(df_g[f"dR^2"]) * L_contour * 2)
    label = f"$l_K / L = {l_K : .2f}$"
    # label box pinned 5pt inside the top-left corner of the axes
    trans = matplotlib.transforms.ScaledTranslation(5 / 72, -5 / 72, fig.dpi_scale_trans)
    ax.text(0.0, 1.0, label, transform=ax.transAxes + trans,
            fontsize='large', verticalalignment='top',
            bbox=dict(facecolor='0.8', edgecolor='0.9'))
    ax.plot(
        df_g["t/LJ"],
        y,
        label="Empirical",
        linewidth=3
    )
    ax.fill_between(
        x=df_g["t/LJ"],
        y1=y - dy,
        y2=y + dy,
        alpha=.2,
        linewidth=0
    )
    N_K = df_kuhn_summary.loc[kappa]["N_K"]
    tau_R = df_rouse_times_theory.loc[kappa]["Theory tau_R"]
    # at most round(N_K) Rouse modes; stiff chains (N_K < 1) get one mode
    p_max = 1 if N_K < 1 else int(round(N_K))
    rouse_predictions = rouse.rouse_g_4(
        t=df_g["t/LJ"],
        tau_R=tau_R,
        p_max=p_max,
        N_b=N_K,
        l_b=l_K * L_contour
    )
    ax.plot(
        df_g["t/LJ"],
        np.sqrt(rouse_predictions) / L_contour,
        linestyle="dashed",
        path_effects=[pe.Stroke(linewidth=3, foreground='black'), pe.Normal()],
        label="Rouse model",
        color="orange",
    )
    ax.legend().set_visible(False)
fig.legend(*axs[-1].get_legend_handles_labels(), ncol=3, loc='lower center', bbox_to_anchor=(0.5, -0.06))
fig.supxlabel("t / LJ")
fig.supylabel("$ \sqrt {{\langle (\Delta R(t))^2 \\rangle}} / L$")
fig.suptitle(
    f"Comparison empirical MSD with Rouse model for different kuhn lengths $l_K$; $\zeta_e = \zeta = 1.0$; L = {L_contour : .2f}")
Text(0.5, 0.98, 'Comparison empirical MSD with Rouse model for different kuhn lengths $l_K$; $\\zeta_e = \\zeta = 1.0$; L = 61.11')
# Fit the Rouse-model MSD with tau_R as the only free parameter, per kappa,
# using only the early-time window t/LJ < 1e4.
df_rouse_times_fit = []
for i, (kappa, df_ete_change_kappas_equi_kappa) in enumerate(df_msd.groupby("kappa")):
    l_K = df_kuhn_summary.loc[kappa]["l_K"]
    N_K = df_kuhn_summary.loc[kappa]["N_K"]
    popt, pcov = scipy.optimize.curve_fit(
        functools.partial(rouse.rouse_g_4, N_b=N_K, l_b=l_K, p_max=conf.initial_system_config.system_config.n_monomers),
        df_ete_change_kappas_equi_kappa.loc[df_ete_change_kappas_equi_kappa["t/LJ"] < 1e4]["t/LJ"],
        df_ete_change_kappas_equi_kappa[df_ete_change_kappas_equi_kappa["t/LJ"] < 1e4]["dR^2"],
        # initial guess grows by a decade per kappa; p0 is a bare scalar for i == 0
        p0=(10 ** (i + 1),) if i != 0 else 100
    )
    # row: (kappa, tau_R, 1-sigma error from the covariance diagonal, l_K, N_K)
    df_rouse_times_fit.append((kappa, popt[0], np.sqrt(np.diag(pcov))[0], l_K, N_K))
df_rouse_times_fit = pd.DataFrame(
    df_rouse_times_fit,
    columns=["kappa", "Empirical tau_R", "Empirical Delta tau_R", "l_K", "N_K"]
).set_index("kappa")
# Single-bead time tau_0 = tau_R / N_K^2 (here without the 3 pi^2 factor
# used for the theoretical tau_0 above).
df_rouse_times_fit["Empirical tau_0"] = df_rouse_times_fit["Empirical tau_R"] / df_rouse_times_fit["N_K"] ** 2
df_rouse_times_fit["Empirical Delta tau_0"] = df_rouse_times_fit["Empirical Delta tau_R"] / df_rouse_times_fit[
    "N_K"] ** 2
df_rouse_times_fit
| Empirical tau_R | Empirical Delta tau_R | l_K | N_K | Empirical tau_0 | Empirical Delta tau_0 | |
|---|---|---|---|---|---|---|
| kappa | ||||||
| 1.0 | 953.318324 | 0.719159 | 1.854017 | 32.960864 | 8.774872e-01 | 0.000662 |
| 11.0 | 17978.154608 | 15.117218 | 20.370000 | 3.000000 | 1.997573e+03 | 1.679691 |
| 21.0 | 61946.513031 | 63.345018 | 39.770000 | 1.536585 | 2.623635e+04 | 26.828666 |
| 31.0 | 130929.255556 | 123.814540 | 59.170000 | 1.032787 | 1.227482e+05 | 116.078081 |
| 41.0 | 264613.191612 | 333.798881 | 78.570000 | 0.777778 | 4.374218e+05 | 551.789987 |
| 51.0 | 445213.323751 | 527.801071 | 97.970000 | 0.623762 | 1.144273e+06 | 1356.537849 |
| 61.0 | 640515.582079 | 755.887583 | 117.370000 | 0.520661 | 2.362759e+06 | 2788.347215 |
| 71.0 | 903600.632731 | 1224.359624 | 136.770000 | 0.446809 | 4.526199e+06 | 6132.903424 |
# Compare the empirical total MSD with the Rouse model using the FITTED
# tau_R, one subplot per kappa.
fig: plt.Figure
fig, axs = plt.subplots(nrows=2, ncols=4, figsize=(2 * len(kappas), 8), sharex="all", sharey="all",
                        layout="constrained")
axs = axs.flatten()
for i, (kappa, df_g) in enumerate(df_msd.groupby("kappa")):
    ax = axs[i]
    l_K = kremer_grest.bare_kuhn_length(kappa, l_b)
    y = np.sqrt(df_g[f"dR^2"]) / L_contour
    # error propagation: delta sqrt(g) = delta g / (2 sqrt(g))
    dy = df_g[f"delta dR^2"] / (np.sqrt(df_g[f"dR^2"]) * L_contour * 2)
    N_K = df_kuhn_summary.loc[kappa]["N_K"]
    tau_R = df_rouse_times_fit.loc[kappa]["Empirical tau_R"]
    dtau_R = df_rouse_times_fit.loc[kappa]["Empirical Delta tau_R"]
    # round tau_R for the label when the uncertainty exceeds 1
    if dtau_R >= 1.0:
        tau_R = round(tau_R)
        dtau_R = round(dtau_R)
        tau_label = f"$\\tau_R = {tau_R} \pm {dtau_R}$"
    else:
        tau_label = f"$\\tau_R = {tau_R:.2f} \pm {dtau_R:.2f}$"
    label = f"$l_K / L = {l_K / L_contour : .2f}$\n{tau_label}"
    trans = matplotlib.transforms.ScaledTranslation(5 / 72, -5 / 72, fig.dpi_scale_trans)
    ax.text(0.0, 1.0, label, transform=ax.transAxes + trans,
            fontsize='large', verticalalignment='top',
            bbox=dict(facecolor='0.8', edgecolor='0.9'))
    ax.plot(
        df_g["t/LJ"],
        y,
        label="Empirical",
        linewidth=3
    )
    ax.fill_between(
        x=df_g["t/LJ"],
        y1=y - dy,
        y2=y + dy,
        alpha=.2,
        linewidth=0
    )
    # NOTE(review): the model curve is evaluated with the display-rounded
    # tau_R (rounded above when dtau_R >= 1) -- confirm this is intended.
    rouse_fit = rouse.rouse_g_4(
        t=df_g["t/LJ"],
        N_b=N_K,
        l_b=l_K,
        p_max=conf.initial_system_config.system_config.n_monomers,
        tau_R=tau_R
    )
    ax.plot(
        df_g["t/LJ"],
        np.sqrt(rouse_fit) / L_contour,
        label=f"Rouse model fit",
        path_effects=[pe.Stroke(linewidth=3, foreground='black'), pe.Normal()],
        color="orange",
        linestyle="--"
    )
    ax.legend().set_visible(False)
fig.legend(*axs[-1].get_legend_handles_labels(), ncol=3, loc='lower center', bbox_to_anchor=(0.5, -0.06))
fig.supxlabel("t / LJ")
fig.supylabel("$ \sqrt {{\langle (\Delta R(t))^2 \\rangle}} / L$")
fig.suptitle(
    f"MSD: Simulation and fit of Rouse model with free parameter $\\tau_R$ for different kuhn lengths $l_K$; $\zeta_e = \zeta = 1.0$; L={L_contour : .2f}")
Text(0.5, 0.98, 'MSD: Simulation and fit of Rouse model with free parameter $\\tau_R$ for different kuhn lengths $l_K$; $\\zeta_e = \\zeta = 1.0$; L= 61.11')
# Second fit: adjusted Rouse model (rouse_g_4_adj) with a single mode
# (p_max=1) and the equilibrium R^2 fixed from the summary table; free
# parameters are tau_R and the amplitude "a".  Overwrites df_rouse_times_fit.
df_rouse_times_fit = []
for i, (kappa, df_ete_change_kappas_equi_kappa) in enumerate(df_msd.groupby("kappa")):
    l_K = df_kuhn_summary.loc[kappa]["l_K"]
    N_K = df_kuhn_summary.loc[kappa]["N_K"]
    #p_max = int(round(N_K)) if N_K > 1 else 1
    p_max = 1
    popt, pcov = scipy.optimize.curve_fit(
        functools.partial(rouse.rouse_g_4_adj, p_max=p_max, R=df_kuhn_summary.loc[kappa]["R^2"], b=1),
        df_ete_change_kappas_equi_kappa["t/LJ"],
        df_ete_change_kappas_equi_kappa["dR^2"],
        #sigma=df_ete_change_kappas_equi_kappa["delta dR^2"].iloc[1:],
        #absolute_sigma=True,
        maxfev=2000,
        # seed tau_R with the analytical value; both parameters kept positive
        p0=(df_rouse_times_theory.loc[kappa]["Theory tau_R"], 2),
        bounds=([0, 0], [np.inf, np.inf])
    )
    # row: (kappa, tau_R, d tau_R, l_K, N_K, a, delta a, b)
    df_rouse_times_fit.append((kappa, popt[0], np.sqrt(np.diag(pcov))[0], l_K, N_K, popt[1], np.sqrt(np.diag(pcov))[1], 1))
df_rouse_times_fit = pd.DataFrame(
    df_rouse_times_fit,
    columns=["kappa", "Empirical tau_R", "Empirical Delta tau_R", "l_K", "N_K", "a", "delta a","b"]
).set_index("kappa")
# Single-bead time tau_0 = tau_R / N_K^2.
df_rouse_times_fit["Empirical tau_0"] = df_rouse_times_fit["Empirical tau_R"] / df_rouse_times_fit["N_K"] ** 2
df_rouse_times_fit["Empirical Delta tau_0"] = df_rouse_times_fit["Empirical Delta tau_R"] / df_rouse_times_fit[
    "N_K"] ** 2
df_rouse_times_fit
| Empirical tau_R | Empirical Delta tau_R | l_K | N_K | a | delta a | b | Empirical tau_0 | Empirical Delta tau_0 | |
|---|---|---|---|---|---|---|---|---|---|
| kappa | |||||||||
| 1.0 | 768.514273 | 1.141724 | 1.854017 | 32.960864 | 1.993612 | 0.000600 | 1 | 0.707383 | 0.001051 |
| 11.0 | 5664.140201 | 6.077726 | 20.370000 | 3.000000 | 1.685381 | 0.000912 | 1 | 629.348919 | 0.675303 |
| 21.0 | 8449.287489 | 9.230514 | 39.770000 | 1.536585 | 1.456440 | 0.000772 | 1 | 3578.546805 | 3.909421 |
| 31.0 | 9549.060213 | 13.127675 | 59.170000 | 1.032787 | 1.333883 | 0.000870 | 1 | 8952.394319 | 12.307402 |
| 41.0 | 10090.455644 | 9.636525 | 78.570000 | 0.777778 | 1.164767 | 0.000522 | 1 | 16680.140963 | 15.929766 |
| 51.0 | 9859.426103 | 10.241859 | 97.970000 | 0.623762 | 1.004755 | 0.000492 | 1 | 25340.389437 | 26.323307 |
| 61.0 | 9667.267729 | 10.427191 | 117.370000 | 0.520661 | 0.943176 | 0.000481 | 1 | 35660.989372 | 38.464224 |
| 71.0 | 8604.676571 | 7.976968 | 136.770000 | 0.446809 | 0.809243 | 0.000363 | 1 | 43101.429808 | 39.957191 |
fig: plt.Figure
fig, axs = plt.subplots(nrows=2, ncols=4, figsize=(2 * len(kappas), 8), sharex="all", sharey="all",
                        layout="constrained")
axs = axs.flatten()
# Same MSD comparison as the previous figure, but against the *adjusted*
# Rouse model (fitted amplitude a, p_max = 1) on linear axes.
for i, (kappa, df_g) in enumerate(df_msd.groupby("kappa")):
    ax = axs[i]
    l_K = kremer_grest.bare_kuhn_length(kappa, l_b)
    # Error of sqrt(x) propagated as dx / (2 sqrt(x)).
    y = np.sqrt(df_g["dR^2"]) / L_contour
    dy = df_g["delta dR^2"] / (np.sqrt(df_g["dR^2"]) * L_contour * 2)
    N_K = df_kuhn_summary.loc[kappa]["N_K"]
    tau_R = df_rouse_times_fit.loc[kappa]["Empirical tau_R"]
    dtau_R = df_rouse_times_fit.loc[kappa]["Empirical Delta tau_R"]
    # Round only for display; the model below receives the unrounded tau_R
    # (previously the rounded value leaked into the plotted fit curve).
    if dtau_R >= 1.0:
        tau_label = rf"$\tau_R = {round(tau_R)} \pm {round(dtau_R)}$"
    else:
        tau_label = rf"$\tau_R = {tau_R:.2f} \pm {dtau_R:.2f}$"
    a = df_rouse_times_fit.loc[kappa]["a"]
    a_label = rf"$a \approx {a:.3f}$"
    label = f"$l_K / L = {l_K / L_contour : .2f}$\n{tau_label}\n{a_label}"
    trans = matplotlib.transforms.ScaledTranslation(5 / 72, -5 / 72, fig.dpi_scale_trans)
    # First panel keeps the annotation at the top-left; the others place it
    # lower inside the axes.
    if i == 0:
        ax.text(0.0, 1.0, label, transform=ax.transAxes + trans,
                fontsize='large', verticalalignment='top',
                bbox=dict(facecolor='0.8', edgecolor='0.9'))
    else:
        ax.text(0.5, 0.3, label, transform=ax.transAxes + trans,
                fontsize='large', verticalalignment='top',
                bbox=dict(facecolor='0.8', edgecolor='0.9'))
    ax.plot(
        df_g["t/LJ"],
        y,
        label="Empirical",
        linewidth=3
    )
    ax.fill_between(
        x=df_g["t/LJ"],
        y1=y - dy,
        y2=y + dy,
        alpha=.2,
        linewidth=0
    )
    rouse_fit = rouse.rouse_g_4_adj(
        t=df_g["t/LJ"],
        R=df_kuhn_summary.loc[kappa]["R^2"],
        p_max=1,
        tau_R=tau_R,
        a=df_rouse_times_fit.loc[kappa]["a"],
        b=df_rouse_times_fit.loc[kappa]["b"]
    )
    ax.plot(
        df_g["t/LJ"],
        np.sqrt(rouse_fit) / L_contour,
        label="Rouse model fit",
        path_effects=[pe.Stroke(linewidth=3, foreground='black'), pe.Normal()],
        color="orange",
        linestyle="--"
    )
    # Per-axes legends are hidden in favor of one shared figure legend below.
    ax.legend().set_visible(False)
fig.legend(*axs[-1].get_legend_handles_labels(), ncol=3, loc='lower center', bbox_to_anchor=(0.5, -0.06))
fig.supxlabel("t / LJ")
fig.supylabel(r"$ \sqrt {{\langle (\Delta R(t))^2 \rangle}} / L$")
fig.suptitle(
    rf"MSD: Simulation and fit of Rouse model with free parameter $\tau_R$ for different kuhn lengths $l_K$; $\zeta_e = \zeta = 1.0$; L={L_contour : .2f}")
Text(0.5, 0.98, 'MSD: Simulation and fit of Rouse model with free parameter $\\tau_R$ for different kuhn lengths $l_K$; $\\zeta_e = \\zeta = 1.0$; L= 61.11')
fig: plt.Figure
fig, axs = plt.subplots(nrows=2, ncols=4, figsize=(2 * len(kappas), 8), sharex="all", sharey="all",
                        layout="constrained")
axs = axs.flatten()
# Log-log variant of the adjusted-Rouse comparison figure above.
for i, (kappa, df_g) in enumerate(df_msd.groupby("kappa")):
    ax = axs[i]
    l_K = kremer_grest.bare_kuhn_length(kappa, l_b)
    # Error of sqrt(x) propagated as dx / (2 sqrt(x)).
    y = np.sqrt(df_g["dR^2"]) / L_contour
    dy = df_g["delta dR^2"] / (np.sqrt(df_g["dR^2"]) * L_contour * 2)
    N_K = df_kuhn_summary.loc[kappa]["N_K"]
    tau_R = df_rouse_times_fit.loc[kappa]["Empirical tau_R"]
    dtau_R = df_rouse_times_fit.loc[kappa]["Empirical Delta tau_R"]
    # Round only for display; the model below receives the unrounded tau_R
    # (previously the rounded value leaked into the plotted fit curve).
    if dtau_R >= 1.0:
        tau_label = rf"$\tau_R = {round(tau_R)} \pm {round(dtau_R)}$"
    else:
        tau_label = rf"$\tau_R = {tau_R:.2f} \pm {dtau_R:.2f}$"
    a = df_rouse_times_fit.loc[kappa]["a"]
    a_label = rf"$a \approx {a:.3f}$"
    label = f"$l_K / L = {l_K / L_contour : .2f}$\n{tau_label}\n{a_label}"
    trans = matplotlib.transforms.ScaledTranslation(5 / 72, -5 / 72, fig.dpi_scale_trans)
    ax.text(0.0, 1.0, label, transform=ax.transAxes + trans,
            fontsize='large', verticalalignment='top',
            bbox=dict(facecolor='0.8', edgecolor='0.9'))
    ax.plot(
        df_g["t/LJ"],
        y,
        label="Empirical",
        linewidth=3
    )
    ax.fill_between(
        x=df_g["t/LJ"],
        y1=y - dy,
        y2=y + dy,
        alpha=.2,
        linewidth=0
    )
    rouse_fit = rouse.rouse_g_4_adj(
        t=df_g["t/LJ"],
        R=df_kuhn_summary.loc[kappa]["R^2"],
        p_max=1,
        tau_R=tau_R,
        a=df_rouse_times_fit.loc[kappa]["a"],
        b=df_rouse_times_fit.loc[kappa]["b"]
    )
    ax.plot(
        df_g["t/LJ"],
        np.sqrt(rouse_fit) / L_contour,
        label="Rouse model fit",
        path_effects=[pe.Stroke(linewidth=3, foreground='black'), pe.Normal()],
        color="orange",
        linestyle="--"
    )
    # Per-axes legends are hidden in favor of one shared figure legend below.
    ax.legend().set_visible(False)
    ax.set(xscale="log", yscale="log")
fig.legend(*axs[-1].get_legend_handles_labels(), ncol=3, loc='lower center', bbox_to_anchor=(0.5, -0.06))
fig.supxlabel("t / LJ")
fig.supylabel(r"$ \sqrt {{\langle (\Delta R(t))^2 \rangle}} / L$")
fig.suptitle(
    rf"MSD: Simulation and fit of Rouse model with free parameter $\tau_R$ for different kuhn lengths $l_K$; $\zeta_e = \zeta = 1.0$; L={L_contour : .2f}")
Text(0.5, 0.98, 'MSD: Simulation and fit of Rouse model with free parameter $\\tau_R$ for different kuhn lengths $l_K$; $\\zeta_e = \\zeta = 1.0$; L= 61.11')
# Merge the fitted Rouse times into the theory table; l_K and N_K already
# exist there, so drop the duplicate columns first.
# NOTE(review): "rimes" is a typo for "times", kept because later cells use it.
df_rouse_rimes = df_rouse_times_theory.join(df_rouse_times_fit.drop(columns=["l_K", "N_K"]))
df_rouse_rimes
| l_K | N_K | Theory tau_R | Theory tau_0 | Empirical tau_R | Empirical Delta tau_R | a | delta a | b | Empirical tau_0 | Empirical Delta tau_0 | |
|---|---|---|---|---|---|---|---|---|---|---|---|
| kappa | |||||||||||
| 1.0 | 1.854017 | 32.960864 | 991.101568 | 2.701108e+01 | 751.769838 | 1.328295 | 1.987994 | 0.000709 | 1 | 0.691971 | 0.001223 |
| 11.0 | 20.370000 | 3.000000 | 10889.187947 | 3.582399e+04 | 5362.945210 | 9.134761 | 1.652379 | 0.001465 | 1 | 595.882809 | 1.014973 |
| 21.0 | 39.770000 | 1.536585 | 21259.842998 | 2.666046e+05 | 8449.287489 | 9.230514 | 1.456440 | 0.000772 | 1 | 3578.546805 | 3.909421 |
| 31.0 | 59.170000 | 1.032787 | 31630.498119 | 8.780224e+05 | 9549.060213 | 13.127675 | 1.333883 | 0.000870 | 1 | 8952.394319 | 12.307402 |
| 41.0 | 78.570000 | 0.777778 | 42001.153240 | 2.055754e+06 | 10090.455644 | 9.636525 | 1.164767 | 0.000522 | 1 | 16680.140963 | 15.929766 |
| 51.0 | 97.970000 | 0.623762 | 52371.808361 | 3.985476e+06 | 9859.426103 | 10.241859 | 1.004755 | 0.000492 | 1 | 25340.389437 | 26.323307 |
| 61.0 | 117.370000 | 0.520661 | 62742.463482 | 6.852866e+06 | 9667.267729 | 10.427191 | 0.943176 | 0.000481 | 1 | 35660.989372 | 38.464224 |
| 71.0 | 136.770000 | 0.446809 | 73113.118603 | 1.084360e+07 | 8604.676571 | 7.976968 | 0.809243 | 0.000363 | 1 | 43101.429808 | 39.957191 |
ax: plt.Axes
fig: plt.Figure
fig, ax = plt.subplots()
# Fitted tau_0 against the Kuhn length normalized by the contour length,
# drawn as isolated markers with 1-sigma error bars.
kuhn_fraction = df_rouse_rimes["l_K"] / L_contour
ax.errorbar(
    x=kuhn_fraction,
    y=df_rouse_rimes["Empirical tau_0"],
    yerr=df_rouse_rimes["Empirical Delta tau_0"],
    marker="o",
    linestyle="",
    label="Empirical",
)
<ErrorbarContainer object of 3 artists>
import matplotlib.container
import matplotlib.collections

ax: plt.Axes
fig: plt.Figure
fig, ax = plt.subplots(figsize=(8, 7))
# Shared x axis for both series: Kuhn length normalized by contour length.
kuhn_fraction = df_rouse_rimes["l_K"] / L_contour
# Empirical tau_0 with 1-sigma error bars.
errorbar_container: matplotlib.container.ErrorbarContainer = ax.errorbar(
    x=kuhn_fraction,
    y=df_rouse_rimes["Empirical tau_0"],
    yerr=df_rouse_rimes["Empirical Delta tau_0"],
    marker="o",
    linestyle="",
    label="Empirical",
)
ax.set(
    xlabel="$l_K/L$",
    ylabel="$\\tau_0$ / LJ",
    title="Compare theoretical and empirical $\\tau_0$",
)
# Theoretical tau_0 overlaid for comparison.
path_coll: matplotlib.collections.PathCollection = ax.scatter(
    x=kuhn_fraction,
    y=df_rouse_rimes["Theory tau_0"],
    marker="o",
    linestyle="",
    color="orange",
    label="Theoretical",
)
ax.legend()
<matplotlib.legend.Legend at 0x2ac2c8a24cd0>
ax: plt.Axes
fig: plt.Figure
fig, ax = plt.subplots(figsize=(8, 7))
# Difference between fitted and theoretical tau_0, shown with the
# empirical 1-sigma uncertainty.
tau0_residual = df_rouse_rimes["Empirical tau_0"] - df_rouse_rimes["Theory tau_0"]
errorbar_container: matplotlib.container.ErrorbarContainer = ax.errorbar(
    x=df_rouse_rimes["l_K"] / L_contour,
    y=tau0_residual,
    yerr=df_rouse_rimes["Empirical Delta tau_0"],
    marker="o",
    linestyle="",
    label="Empirical - Theory",
)
ax.set(
    xlabel="$l_K/L$",
    ylabel="$\\Delta \\tau_0$ / LJ",
    title="Empirical $\\tau_0$ - Theoretical $\\tau_0$",
)
[Text(0.5, 0, '$l_K/L$'), Text(0, 0.5, '$\\Delta \\tau_0$ / LJ'), Text(0.5, 1.0, 'Empirical $\\tau_0$ - Theoretical $\\tau_0$')]
import plotly
# Embed plotly's JS bundle in the notebook so figures render without a network.
plotly.offline.init_notebook_mode(connected=False)